Effect of UPSTM-Based
Decorrelation on Feature Discovery
Loading the
libraries
library("FRESA.CAD")
library(readxl)
library(igraph)
library(umap)
library(tsne)
library(entropy)
library(psych)
library(whitening)
library("vioplot")
library("rpart")
op <- par(no.readonly = TRUE)
pander::panderOptions('digits', 3)
pander::panderOptions('table.split.table', 400)
pander::panderOptions('keep.trailing.zeros',TRUE)
Material and
Methods
Data Source https://archive.ics.uci.edu/ml/datasets/seeds
M. Charytanowicz, J. Niewczas, P. Kulczycki, P.A. Kowalski, S.
Lukasik, S. Zak, ‘A Complete Gradient Clustering Algorithm for Features
Analysis of X-ray Images’, in: Information Technologies in Biomedicine,
Ewa Pietka, Jacek Kawa (eds.), Springer-Verlag, Berlin-Heidelberg, 2010,
pp. 15-24.
The Data
# Load the UCI seeds dataset (tab-delimited, no header row) and name the columns.
seeds <- read.delim("~/GitHub/LatentBiomarkers/Data/seeds_dataset.txt", header=FALSE)
par(cex = 0.5)
featnames <- c("area", "perimeter", "compactness", "length_of_kernel",
               "width_of_kernel", "asymmetry_coeff", "length_ker_groove",
               "class")
colnames(seeds) <- featnames
# Binarize the outcome: wheat variety 1 vs. the other two varieties.
seeds$class <- as.numeric(seeds$class == 1)
pander::pander(table(seeds$class))
Standardize the names for reporting
studyName <- "Seeds"
dataframe <- seeds
outcome <- "class"
thro <- 0.80
TopVariables <- 5
cexheat = 0.45
Generating the report
Libraries
Some libraries
# Libraries required by the report section. These were already attached at the
# top of the script; library() is idempotent, so repeating them is harmless
# (kept so this section can also run standalone).
library(psych)
library(whitening)
library("vioplot")
library("rpart")
Data specs
# Report dataset dimensions (feature count excludes the outcome) and the
# class distribution.
pander::pander(c(rows = nrow(dataframe), col = ncol(dataframe) - 1))
pander::pander(table(dataframe[, outcome]))
# The feature list is every column except the outcome.
varlist <- colnames(dataframe)[colnames(dataframe) != outcome]
# Very wide data (> 1500 features) disables the expensive heatmaps below.
largeSet <- length(varlist) > 1500
Scaling the data
Scaling and removing near-zero-variance columns and highly
collinear (r > 0.99999) columns
### Some global cleaning
# Keep only columns with non-zero variance. (The original mask was named
# `sdiszero`, which said the opposite of what it holds.)
hasVariance <- apply(dataframe, 2, sd) > 1.0e-16
dataframe <- dataframe[, hasVariance]
varlist <- colnames(dataframe)[colnames(dataframe) != outcome]
# Drop near-duplicate columns (|r| > 0.99999), always keeping the outcome.
tokeep <- c(as.character(correlated_Remove(dataframe, varlist, thr = 0.99999)), outcome)
dataframe <- dataframe[, tokeep]
varlist <- colnames(dataframe)
varlist <- varlist[varlist != outcome]
# Flag columns with at least 5 distinct values as continuous.
# (vapply over columns replaces apply() on a data.frame, which first coerces
# the whole frame to a matrix.)
iscontinous <- vapply(dataframe, function(col) length(unique(col)) >= 5, logical(1))
dataframeScaled <- FRESAScale(dataframe, method = "OrderLogit")$scaledData
The heatmap of the
data
# Heatmap of the scaled data, capped at 1000 samples for plotting speed.
numsub <- min(nrow(dataframe), 1000)
if (!largeSet) {
  hm <- heatMaps(data = dataframeScaled[seq_len(numsub), ],
                 Outcome = outcome,
                 Scale = TRUE,
                 hCluster = "row",
                 xlab = "Feature",
                 ylab = "Sample",
                 srtCol = 45,
                 srtRow = 45,
                 cexCol = cexheat,
                 cexRow = cexheat)
  par(op)
}

Correlation
Matrix of the Data
The heat map of the data
# Pairwise Pearson correlations of the raw features, shown as |r|.
if (!largeSet) {
  par(cex = 0.6, cex.main = 0.85, cex.axis = 0.7)
  cormat <- cor(dataframe[, varlist], method = "pearson")
  cormat[is.na(cormat)] <- 0
  gplots::heatmap.2(abs(cormat),
                    trace = "none",
                    mar = c(5, 5),
                    col = rev(heat.colors(5)),
                    main = "Original Correlation",
                    cexRow = cexheat,
                    cexCol = cexheat,
                    srtCol = 45,
                    srtRow = 45,
                    key.title = NA,
                    key.xlab = "|Pearson Correlation|",
                    xlab = "Feature", ylab = "Feature")
  # Largest off-diagonal correlation before decorrelation.
  diag(cormat) <- 0
  print(max(abs(cormat)))
}
#> [1] 0.9943409
The
decorrelation
# IDeA decorrelation of the feature space (produces the UPSTM transform as an
# attribute); thr = thro (0.80) is the correlation threshold for latent features.
DEdataframe <- IDeA(dataframe,verbose=TRUE,thr=thro)
#> Included: 7 , Uni p: 0.03711537 , Uncorrelated Base: 2 , Outcome-Driven Size: 0 , Base Size: 2
#> 1 <R=0.994,r=0.972,N= 3>, Top: 1( 1 )[ 1 : 1 Fa= 1 : 0.972 ]( 1 , 1 , 0 ),<|>Tot Used: 2 , Added: 1 , Zero Std: 0 , Max Cor: 0.971
#> 2 <R=0.971,r=0.935,N= 3>, Top: 1( 2 )[ 1 : 1 Fa= 1 : 0.935 ]( 1 , 2 , 1 ),<|>Tot Used: 4 , Added: 2 , Zero Std: 0 , Max Cor: 0.864
#> 3 <R=0.864,r=0.832,N= 4>, Top: 2( 1 )[ 1 : 2 Fa= 2 : 0.832 ]( 2 , 2 , 1 ),<|>Tot Used: 5 , Added: 2 , Zero Std: 0 , Max Cor: 0.825
#> 4 <R=0.825,r=0.800,N= 2>, Top: 1( 1 )[ 1 : 1 Fa= 2 : 0.800 ]( 1 , 1 , 2 ),<|>Tot Used: 5 , Added: 1 , Zero Std: 0 , Max Cor: 0.714
#> 5 <R=0.714,r=0.800,N= 0>
#> [ 5 ], 0.7135263 Decor Dimension: 5 Nused: 5 . Cor to Base: 4 , ABase: 1 , Outcome Base: 0
varlistc <- colnames(DEdataframe)[colnames(DEdataframe) != outcome]
# Total variance before and after decorrelation.
# (The bare numbers that followed each call were pasted console output left as
# executable literals; they are now kept as `#>` output comments.)
pander::pander(sum(apply(dataframe[,varlist],2,var)))
#> 13
pander::pander(sum(apply(DEdataframe[,varlistc],2,var)))
#> 10.8
# Pooled entropy (256-bin discretization) before and after decorrelation.
pander::pander(entropy(discretize(unlist(dataframe[,varlist]), 256)))
#> 4.55
pander::pander(entropy(discretize(unlist(DEdataframe[,varlistc]), 256)))
#> 3.73
The decorrelation
matrix
# Sparsity pattern of the UPSTM decorrelation matrix (1 where |beta| > 0).
if (!largeSet) {
  par(cex = 0.6, cex.main = 0.85, cex.axis = 0.7)
  UPSTM <- attr(DEdataframe, "UPSTM")
  gplots::heatmap.2(1.0 * (abs(UPSTM) > 0),
                    trace = "none",
                    mar = c(5, 5),
                    col = rev(heat.colors(5)),
                    main = "Decorrelation matrix",
                    cexRow = cexheat,
                    cexCol = cexheat,
                    srtCol = 45,
                    srtRow = 45,
                    key.title = NA,
                    key.xlab = "|Beta|>0",
                    xlab = "Output Feature", ylab = "Input Feature")
  par(op)
}

The correlation
matrix after decorrelation
# Pairwise correlations of the decorrelated features; the maximum off-diagonal
# value should now be at or below the IDeA threshold `thro`.
if (!largeSet) {
  cormat <- cor(DEdataframe[, varlistc], method = "pearson")
  cormat[is.na(cormat)] <- 0
  gplots::heatmap.2(abs(cormat),
                    trace = "none",
                    mar = c(5, 5),
                    col = rev(heat.colors(5)),
                    main = "Correlation after IDeA",
                    cexRow = cexheat,
                    cexCol = cexheat,
                    srtCol = 45,
                    srtRow = 45,
                    key.title = NA,
                    key.xlab = "|Pearson Correlation|",
                    xlab = "Feature", ylab = "Feature")
  par(op)
  diag(cormat) <- 0
  print(max(abs(cormat)))
}
#> [1] 0.7135263
U-MAP Visualization
of features
The UMAP based on
LASSO on Raw Data
# 2-D UMAP of the raw (scaled) features; points are labeled with the class
# value and colored with a rainbow palette (one color per class).
if (nrow(dataframe) < 1000) {
  classes <- unique(dataframe[seq_len(numsub), outcome])
  raincolors <- rainbow(length(classes))
  names(raincolors) <- classes
  datasetframe.umap <- umap(scale(dataframe[seq_len(numsub), varlist]), n_components = 2)
  plot(datasetframe.umap$layout, xlab = "U1", ylab = "U2",
       main = "UMAP: Original", t = 'n')
  text(datasetframe.umap$layout,
       labels = dataframe[seq_len(numsub), outcome],
       col = raincolors[dataframe[seq_len(numsub), outcome] + 1])
}

The decorrelated UMAP
# UMAP of the decorrelated features, reusing the class colors from the
# raw-data UMAP above.
if (nrow(dataframe) < 1000) {
  datasetframe.umap <- umap(scale(DEdataframe[seq_len(numsub), varlistc]), n_components = 2)
  plot(datasetframe.umap$layout, xlab = "U1", ylab = "U2",
       main = "UMAP: After IDeA", t = 'n')
  text(datasetframe.umap$layout,
       labels = DEdataframe[seq_len(numsub), outcome],
       col = raincolors[DEdataframe[seq_len(numsub), outcome] + 1])
}

Univariate
Analysis
Univariate
# Univariate AUC ranking of the raw features against the null model
# outcome ~ 1.
univarRAW <- uniRankVar(varlist,
                        paste(outcome, "~1"),
                        outcome,
                        dataframe,
                        rankingTest = "AUC")
# Same ranking on the decorrelated features. (A stray trailing comma after
# rankingTest="AUC" in the original produced an empty argument in the call.)
univarDe <- uniRankVar(varlistc,
                       paste(outcome, "~1"),
                       outcome,
                       DEdataframe,
                       rankingTest = "AUC")
Final Table
# Columns reported in the univariate summary tables.
univariate_columns <- c("caseMean","caseStd","controlMean","controlStd","controlKSP","ROCAUC")
## Logical mask selecting the first TopVariables rows of the ranked table.
## (seq_along replaces the c(1:length(...)) anti-pattern; identical result.)
topvar <- seq_along(varlist) <= TopVariables
tableRaw <- univarRAW$orderframe[topvar, univariate_columns]
pander::pander(tableRaw)
|                   | caseMean | caseStd | controlMean | controlStd | controlKSP | ROCAUC |
|-------------------|----------|---------|-------------|------------|------------|--------|
| asymmetry_coeff   | 2.67     | 1.1739  | 4.217       | 1.3818     | 9.80e-01   | 0.810  |
| length_ker_groove | 5.09     | 0.2637  | 5.569       | 0.5009     | 1.98e-03   | 0.764  |
| compactness       | 0.88     | 0.0162  | 0.866       | 0.0254     | 5.69e-01   | 0.653  |
| length_of_kernel  | 5.51     | 0.2315  | 5.689       | 0.5075     | 3.68e-04   | 0.562  |
| perimeter         | 14.29    | 0.5766  | 14.692      | 1.5318     | 2.01e-05   | 0.524  |
# Latent ("La_"-prefixed) features from the decorrelated ranking, merged with
# the overall top-ranked features. base::grepl replaces stringr::str_detect,
# which was used without stringr ever being attached.
topLAvar <- univarDe$orderframe$Name[grepl("La_", univarDe$orderframe$Name, fixed = TRUE)]
topLAvar <- unique(c(univarDe$orderframe$Name[topvar], topLAvar[1:as.integer(TopVariables/2)]))
finalTable <- univarDe$orderframe[topLAvar, univariate_columns]
pander::pander(finalTable)
|                      | caseMean | caseStd | controlMean | controlStd | controlKSP | ROCAUC |
|----------------------|----------|---------|-------------|------------|------------|--------|
| La_length_ker_groove | 3.00     | 0.1832  | 3.365       | 0.1729     | 0.849      | 0.927  |
| asymmetry_coeff      | 2.67     | 1.1739  | 4.217       | 1.3818     | 0.980      | 0.810  |
| La_width_of_kernel   | 3.29     | 0.0455  | 3.255       | 0.0493     | 0.939      | 0.712  |
| compactness          | 0.88     | 0.0162  | 0.866       | 0.0254     | 0.569      | 0.653  |
| La_length_of_kernel  | 3.43     | 0.1286  | 3.504       | 0.1377     | 0.993      | 0.637  |
# Latent-variable coefficients and feature scores from the IDeA result.
dc <- getLatentCoefficients(DEdataframe)
fscores <- attr(DEdataframe,"fscore")
pander::pander(c(mean = mean(sapply(dc, length)),
                 total = length(dc),
                 fraction = length(dc)/(ncol(dataframe) - 1)))
theCharformulas <- attr(dc,"LatentCharFormulas")
# Append the top raw features that are not already in the selection.
# BUG FIX: the original used topvar[!(topvar %in% topLAvar)], comparing a
# logical mask against character names (always FALSE), so every top raw row
# was re-appended — producing the duplicated "asymmetry_coeff1"/"compactness1"
# rows seen in the report. Compare row names against topLAvar instead.
finalTable <- rbind(finalTable,
                    tableRaw[!(rownames(tableRaw) %in% topLAvar), univariate_columns])
# Strip the "La_" prefix to look up each feature's raw AUC.
# (base::gsub replaces stringr::str_remove_all; stringr was never attached.)
orgnamez <- gsub("La_", "", rownames(finalTable), fixed = TRUE)
finalTable$RAWAUC <- univarRAW$orderframe[orgnamez,"ROCAUC"]
finalTable$DecorFormula <- theCharformulas[rownames(finalTable)]
finalTable$fscores <- fscores[rownames(finalTable)]
Final_Columns <- c("DecorFormula","caseMean","caseStd","controlMean","controlStd","controlKSP","ROCAUC","RAWAUC","fscores")
finalTable <- finalTable[order(-finalTable$ROCAUC),]
pander::pander(finalTable[,Final_Columns])
|                      | DecorFormula | caseMean | caseStd | controlMean | controlStd | controlKSP | ROCAUC | RAWAUC | fscores |
|----------------------|--------------|----------|---------|-------------|------------|------------|--------|--------|---------|
| La_length_ker_groove | - (0.146)area + length_ker_groove | 3.00 | 0.1832 | 3.365 | 0.1729 | 8.49e-01 | 0.927 | 0.764 | -1 |
| asymmetry_coeff      | NA | 2.67 | 1.1739 | 4.217 | 1.3818 | 9.80e-01 | 0.810 | 0.810 | NA |
| asymmetry_coeff1     | NA | 2.67 | 1.1739 | 4.217 | 1.3818 | 9.80e-01 | 0.810 | NA | NA |
| length_ker_groove    | NA | 5.09 | 0.2637 | 5.569 | 0.5009 | 1.98e-03 | 0.764 | 0.764 | NA |
| La_width_of_kernel   | - (0.204)area + (0.540)length_of_kernel + width_of_kernel | 3.29 | 0.0455 | 3.255 | 0.0493 | 9.39e-01 | 0.712 | 0.501 | -2 |
| compactness          | NA | 0.88 | 0.0162 | 0.866 | 0.0254 | 5.69e-01 | 0.653 | 0.653 | NA |
| compactness1         | NA | 0.88 | 0.0162 | 0.866 | 0.0254 | 5.69e-01 | 0.653 | NA | NA |
| La_length_of_kernel  | - (0.145)area + length_of_kernel | 3.43 | 0.1286 | 3.504 | 0.1377 | 9.93e-01 | 0.637 | 0.562 | 1 |
| length_of_kernel     | NA | 5.51 | 0.2315 | 5.689 | 0.5075 | 3.68e-04 | 0.562 | 0.562 | NA |
| perimeter            | NA | 14.29 | 0.5766 | 14.692 | 1.5318 | 2.01e-05 | 0.524 | 0.524 | NA |
Comparing IDeA vs
PCA vs EFA
PCA
# Principal components of the continuous features (centered and scaled),
# recombined with the non-continuous columns (e.g. the outcome).
featuresnames <- colnames(dataframe)[colnames(dataframe) != outcome]
pc <- prcomp(dataframe[, iscontinous], center = TRUE, scale. = TRUE)
predPCA <- predict(pc, dataframe[, iscontinous])
PCAdataframe <- as.data.frame(cbind(predPCA, dataframe[, !iscontinous]))
colnames(PCAdataframe) <- c(colnames(predPCA), colnames(dataframe)[!iscontinous])
# Correlation structure of the PC scores (should be near-diagonal).
PCACor <- cor(PCAdataframe[, colnames(PCAdataframe) != outcome])
gplots::heatmap.2(abs(PCACor),
                  trace = "none",
                  mar = c(5, 5),
                  col = rev(heat.colors(5)),
                  main = "PCA Correlation",
                  cexRow = 0.5,
                  cexCol = 0.5,
                  srtCol = 45,
                  srtRow = -45,
                  key.title = NA,
                  key.xlab = "Pearson Correlation",
                  xlab = "Feature", ylab = "Feature")

EFA
# Exploratory Factor Analysis (varimax rotation) on the scaled data;
# falls back to the scaled data unchanged when the dataset is too wide.
EFAdataframe <- dataframeScaled
if (length(iscontinous) < 2000)
{
# NOTE(review): length(iscontinous) counts ALL columns, not just the
# continuous ones — sum(iscontinous) may have been intended here and in the
# guard above; confirm with the author.
# NOTE(review): ncol(predPCA)/2 can be fractional; fa() presumably truncates
# nfactors — verify.
topred <- min(length(iscontinous),nrow(dataframeScaled),ncol(predPCA)/2)
if (topred < 2) topred <- 2
uls <- fa(dataframeScaled[,iscontinous],nfactors=topred,rotate="varimax",warnings=FALSE) # EFA analysis
predEFA <- predict(uls,dataframeScaled[,iscontinous])
# Recombine factor scores with the non-continuous columns (e.g. the outcome).
EFAdataframe <- as.data.frame(cbind(predEFA,dataframeScaled[,!iscontinous]))
colnames(EFAdataframe) <- c(colnames(predEFA),colnames(dataframeScaled)[!iscontinous])
# Correlation structure of the factor scores.
EFACor <- cor(EFAdataframe[,colnames(EFAdataframe) != outcome])
gplots::heatmap.2(abs(EFACor),
trace = "none",
# scale = "row",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "EFA Correlation",
cexRow = 0.5,
cexCol = 0.5,
srtCol=45,
srtRow= -45,
key.title=NA,
key.xlab="Pearson Correlation",
xlab="Feature", ylab="Feature")
}

Effect on CAR
modeling
# Depth-3 CART on the raw features; diagnostics reported via epiR.
par(op)
par(xpd = TRUE)
dataframe[, outcome] <- factor(dataframe[, outcome])
rawmodel <- rpart(paste(outcome, "~."), dataframe,
                  control = rpart.control(maxdepth = 3))
pr <- predict(rawmodel, dataframe, type = "class")
# Placeholder in case the tree degenerates to a single predicted class.
ptab <- list(er = "Error", detail = matrix(nrow = 6, ncol = 1))
if (length(unique(pr)) > 1) {
  plot(rawmodel, main = "Raw", branch = 0.5,
       uniform = TRUE, compress = TRUE, margin = 0.1)
  text(rawmodel, use.n = TRUE, cex = 0.75)
  ptab <- epiR::epi.tests(table(pr == 0, dataframe[, outcome] == 0))
}

# Confusion table, then accuracy / sensitivity / specificity / odds ratio.
pander::pander(table(dataframe[, outcome], pr))
pander::pander(ptab$detail[c(5, 3, 4, 6), ])
|   | statistic | est    | lower  | upper   |
|---|-----------|--------|--------|---------|
| 5 | diag.ac   | 0.871  | 0.818  | 0.914   |
| 3 | se        | 0.700  | 0.579  | 0.804   |
| 4 | sp        | 0.957  | 0.909  | 0.984   |
| 6 | diag.or   | 52.111 | 19.864 | 136.710 |
# Depth-3 CART on the IDeA-decorrelated features.
par(op)
par(xpd = TRUE)
DEdataframe[, outcome] <- factor(DEdataframe[, outcome])
IDeAmodel <- rpart(paste(outcome, "~."), DEdataframe,
                   control = rpart.control(maxdepth = 3))
pr <- predict(IDeAmodel, DEdataframe, type = "class")
# Placeholder in case the tree degenerates to a single predicted class.
ptab <- list(er = "Error", detail = matrix(nrow = 6, ncol = 1))
if (length(unique(pr)) > 1) {
  plot(IDeAmodel, main = "IDeA", branch = 0.5,
       uniform = TRUE, compress = TRUE, margin = 0.1)
  text(IDeAmodel, use.n = TRUE, cex = 0.75)
  ptab <- epiR::epi.tests(table(pr == 0, DEdataframe[, outcome] == 0))
}

# Confusion table, then accuracy / sensitivity / specificity / odds ratio.
pander::pander(table(DEdataframe[, outcome], pr))
pander::pander(ptab$detail[c(5, 3, 4, 6), ])
|   | statistic | est     | lower  | upper   |
|---|-----------|---------|--------|---------|
| 5 | diag.ac   | 0.919   | 0.874  | 0.952   |
| 3 | se        | 0.900   | 0.805  | 0.959   |
| 4 | sp        | 0.929   | 0.873  | 0.965   |
| 6 | diag.or   | 117.000 | 42.543 | 321.768 |
# Depth-3 CART on the PCA scores.
par(op)
par(xpd = TRUE)
PCAdataframe[, outcome] <- factor(PCAdataframe[, outcome])
PCAmodel <- rpart(paste(outcome, "~."), PCAdataframe,
                  control = rpart.control(maxdepth = 3))
pr <- predict(PCAmodel, PCAdataframe, type = "class")
# Placeholder in case the tree degenerates to a single predicted class.
ptab <- list(er = "Error", detail = matrix(nrow = 6, ncol = 1))
if (length(unique(pr)) > 1) {
  plot(PCAmodel, main = "PCA", branch = 0.5,
       uniform = TRUE, compress = TRUE, margin = 0.1)
  text(PCAmodel, use.n = TRUE, cex = 0.75)
  ptab <- epiR::epi.tests(table(pr == 0, PCAdataframe[, outcome] == 0))
}

# Confusion table, then accuracy / sensitivity / specificity / odds ratio.
pander::pander(table(PCAdataframe[, outcome], pr))
pander::pander(ptab$detail[c(5, 3, 4, 6), ])
|   | statistic | est    | lower  | upper   |
|---|-----------|--------|--------|---------|
| 5 | diag.ac   | 0.905  | 0.857  | 0.941   |
| 3 | se        | 0.843  | 0.736  | 0.919   |
| 4 | sp        | 0.936  | 0.881  | 0.970   |
| 6 | diag.or   | 78.071 | 30.711 | 198.465 |
par(op) # restore default graphics parameters after the PCA tree plot
EFA
# Depth-3 CART on the EFA factor scores.
EFAdataframe[, outcome] <- factor(EFAdataframe[, outcome])
EFAmodel <- rpart(paste(outcome, "~."), EFAdataframe,
                  control = rpart.control(maxdepth = 3))
pr <- predict(EFAmodel, EFAdataframe, type = "class")
# Placeholder in case the tree degenerates to a single predicted class.
ptab <- list(er = "Error", detail = matrix(nrow = 6, ncol = 1))
if (length(unique(pr)) > 1) {
  plot(EFAmodel, main = "EFA", branch = 0.5,
       uniform = TRUE, compress = TRUE, margin = 0.1)
  text(EFAmodel, use.n = TRUE, cex = 0.75)
  ptab <- epiR::epi.tests(table(pr == 0, EFAdataframe[, outcome] == 0))
}

# Confusion table, then accuracy / sensitivity / specificity / odds ratio.
pander::pander(table(EFAdataframe[, outcome], pr))
pander::pander(ptab$detail[c(5, 3, 4, 6), ])
|   | statistic | est    | lower  | upper   |
|---|-----------|--------|--------|---------|
| 5 | diag.ac   | 0.910  | 0.862  | 0.945   |
| 3 | se        | 0.829  | 0.720  | 0.908   |
| 4 | sp        | 0.950  | 0.900  | 0.980   |
| 6 | diag.or   | 91.833 | 34.403 | 245.137 |
par(op) # restore default graphics parameters after the EFA tree plot